Sequence
int64
1
25.2k
Time
int64
1
858M
File
stringclasses
830 values
RangeOffset
int64
0
2.21M
RangeLength
int64
0
168k
Text
stringlengths
1
4.7M
Language
stringclasses
20 values
Type
stringclasses
9 values
1,688
2,105,367
TERMINAL
0
0
20888(B
null
terminal_output
1,689
2,106,390
TERMINAL
0
0
1999(B
null
terminal_output
1,690
2,107,359
TERMINAL
0
0
220302:00(B
null
terminal_output
1,691
2,108,438
TERMINAL
0
0
3111(B
null
terminal_output
1,692
2,109,415
TERMINAL
0
0
4222(B
null
terminal_output
1,693
2,110,443
TERMINAL
0
0
5333(B
null
terminal_output
1,694
2,111,483
TERMINAL
0
0
6444(B
null
terminal_output
1,695
2,112,528
TERMINAL
0
0
7555(B
null
terminal_output
1,696
2,113,559
TERMINAL
0
0
8666(B
null
terminal_output
1,697
2,114,583
TERMINAL
0
0
9777(B
null
terminal_output
1,698
2,115,607
TERMINAL
0
0
30888(B
null
terminal_output
1,699
2,116,630
TERMINAL
0
0
1999(B
null
terminal_output
1,700
2,117,649
TERMINAL
0
0
2304010(B
null
terminal_output
1,701
2,118,675
TERMINAL
0
0
3111(B
null
terminal_output
1,702
2,119,805
TERMINAL
0
0
4222(B
null
terminal_output
1,703
2,120,736
TERMINAL
0
0
5333(B
null
terminal_output
1,704
2,121,853
TERMINAL
0
0
6444(B
null
terminal_output
1,705
2,122,852
TERMINAL
0
0
7555(B
null
terminal_output
1,706
2,123,830
TERMINAL
0
0
8666(B
null
terminal_output
1,707
2,124,925
TERMINAL
0
0
9777(B
null
terminal_output
1,708
2,125,897
TERMINAL
0
0
40888(B
null
terminal_output
1,709
2,126,974
TERMINAL
0
0
1999(B
null
terminal_output
1,710
2,127,997
TERMINAL
0
0
2405020(B
null
terminal_output
1,711
2,128,978
TERMINAL
0
0
3111(B
null
terminal_output
1,712
2,130,049
TERMINAL
0
0
4222(B
null
terminal_output
1,713
2,131,036
TERMINAL
0
0
5333(B
null
terminal_output
1,714
2,132,083
TERMINAL
0
0
6444(B
null
terminal_output
1,715
2,133,091
TERMINAL
0
0
7555(B
null
terminal_output
1,716
2,134,114
TERMINAL
0
0
8777(B
null
terminal_output
1,717
2,135,172
TERMINAL
0
0
50888(B
null
terminal_output
1,718
2,136,157
TERMINAL
0
0
1999(B
null
terminal_output
1,719
2,137,188
TERMINAL
0
0
2506:0030(B
null
terminal_output
1,720
2,138,211
TERMINAL
0
0
3111(B
null
terminal_output
1,721
2,139,278
TERMINAL
0
0
4222(B
null
terminal_output
1,722
2,140,353
TERMINAL
0
0
5333(B
null
terminal_output
1,723
2,141,393
TERMINAL
0
0
6444(B
null
terminal_output
1,724
2,142,466
TERMINAL
0
0
7555(B
null
terminal_output
1,725
2,143,561
TERMINAL
0
0
8666(B
null
terminal_output
1,726
2,144,586
TERMINAL
0
0
9777(B
null
terminal_output
1,727
2,145,565
TERMINAL
0
0
5:00888(B
null
terminal_output
1,728
2,146,634
TERMINAL
0
0
1999(B
null
terminal_output
1,729
2,147,658
TERMINAL
0
0
28:001040(B
null
terminal_output
1,730
2,148,682
TERMINAL
0
0
3111(B
null
terminal_output
1,731
2,149,705
TERMINAL
0
0
4222(B
null
terminal_output
1,732
2,150,715
TERMINAL
0
0
5333(B
null
terminal_output
1,733
2,151,751
TERMINAL
0
0
6444(B
null
terminal_output
1,734
2,152,880
TERMINAL
0
0
7555(B
null
terminal_output
1,735
2,153,815
TERMINAL
0
0
8666(B
null
terminal_output
1,736
2,154,929
TERMINAL
0
0
9777(B
null
terminal_output
1,737
2,155,878
TERMINAL
0
0
10888(B
null
terminal_output
1,738
2,156,899
TERMINAL
0
0
1999(B
null
terminal_output
1,739
2,158,001
TERMINAL
0
0
2102050(B
null
terminal_output
1,740
2,158,953
TERMINAL
0
0
3111(B
null
terminal_output
1,741
2,159,979
TERMINAL
0
0
4222(B
null
terminal_output
1,742
2,161,007
TERMINAL
0
0
5333(B
null
terminal_output
1,743
2,162,042
TERMINAL
0
0
6444(B
null
terminal_output
1,744
2,163,128
TERMINAL
0
0
7555(B
null
terminal_output
1,745
2,164,145
TERMINAL
0
0
8666(B
null
terminal_output
1,746
2,165,127
TERMINAL
0
0
9888(B
null
terminal_output
1,747
2,166,158
TERMINAL
0
0
21999(B
null
terminal_output
1,748
2,167,175
TERMINAL
0
0
220303:00(B
null
terminal_output
1,749
2,168,206
TERMINAL
0
0
3111(B
null
terminal_output
1,750
2,169,233
TERMINAL
0
0
4222(B
null
terminal_output
1,751
2,170,262
TERMINAL
0
0
5333(B
null
terminal_output
1,752
2,171,291
TERMINAL
0
0
6444(B
null
terminal_output
1,753
2,172,322
TERMINAL
0
0
7555(B
null
terminal_output
1
3
utils/dataloader_new.py
0
0
import functools\nimport jax\n\nimport tensorflow as tf\n\n# reserve GPU memory for JAX only if tensorflow is built with GPU support\ntry:\n tf.config.experimental.set_visible_devices([], "GPU")\nexcept tf.errors.NotFoundError:\n pass\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c):\n """\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n "height": tf.io.FixedLenFeature([], tf.int64),\n "width": tf.io.FixedLenFeature([], tf.int64),\n "channels": tf.io.FixedLenFeature([], tf.int64),\n "sequence_length": tf.io.FixedLenFeature([], tf.int64),\n "raw_video": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example["sequence_length"], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example["raw_video"], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef get_dataloader(\n tfrecord_paths: list[str], # List of TFRecord file paths\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 10,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n):\n """\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """\n if not tfrecord_paths:\n raise ValueError("tfrecord_paths list cannot be empty.")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), "Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding."\n per_process_batch_size = global_batch_size // num_processes\n\n # Create a dataset of just the paths (filenames)\n path_dataset = tf.data.Dataset.from_tensor_slices(tfrecord_paths)\n breakpoint()\n\n dataset = tf.data.TFRecordDataset(\n tfrecord_paths, num_parallel_reads=tf.data.AUTOTUNE\n )\n\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n\n # (f.srambical) NOTE: For TFRecords, it's often good to have a large shuffle buffer.\n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()\n
python
tab
2
196
extension-output-pdoom-org.crowd-code-#1-crowd-code
0
0
10:12:51 AM [info] Activating crowd-code\n10:12:51 AM [info] Recording started\n10:12:51 AM [info] Initializing git provider using file system watchers...\n10:12:51 AM [info] Git repository found\n10:12:51 AM [info] Git provider initialized successfully\n
Log
tab
3
279
extension-output-pdoom-org.crowd-code-#1-crowd-code
250
0
10:12:51 AM [info] Initial git state: [object Object]\n
Log
content
4
562
TERMINAL
0
0
/bin/python3 /usr/stud/mahajanm/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /usr/stud/mahajanm/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt
null
terminal_command
5
611
TERMINAL
0
0
]633;E;/bin/python3 /usr/stud/mahajanm/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /usr/stud/mahajanm/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;1789041c-cb33-42e3-8f25-5b94e0dc6928]633;C
null
terminal_output
6
1,710
utils/dataloader_new.py
0
0
null
python
tab
7
3,057
TERMINAL
0
0
queue
null
terminal_command
8
3,097
TERMINAL
0
0
\r\n[?2004l\r]633;E;watch -n1 squeue --me;b1f17683-4794-42de-a676-1cbb4f6ca243]633;C
null
terminal_output
9
7,473
TERMINAL
0
0
[?1049h(B[?7h]4;8;rgb:54/54/54\]4;9;rgb:FF/54/54\]4;10;rgb:54/FF/54\]4;11;rgb:FF/FF/54\]4;12;rgb:54/54/FF\]4;13;rgb:FF/54/FF\]4;14;rgb:54/FF/FF\(B]104[?1049l\r[?1l>]0;mahajanm@atcremers51: /usr/stud/mahajanm/Projects/jafar]633;D;0]633;P;Cwd=/usr/stud/mahajanm/Projects/jafar
null
terminal_output
2
2,413
TERMINAL
0
0
queue
null
terminal_command
3
2,443
TERMINAL
0
0
[?25l[?2004l\r]633;E;watch -n1 squeue --me;895d5730-3b47-4a5d-840c-5d137f58d793]633;C[?25h
null
terminal_output
4
8,529
TERMINAL
0
0
squeue --me
null
terminal_command
5
8,573
TERMINAL
0
0
[?25l[?2004l\r]633;E;squeue --me;895d5730-3b47-4a5d-840c-5d137f58d793]633;C[?25h
null
terminal_output
6
17,811
TERMINAL
0
0
time squeue --me
null
terminal_command
7
17,844
TERMINAL
0
0
[?25l[?2004l\r]633;E;squeue --me;895d5730-3b47-4a5d-840c-5d137f58d793]633;C[?25h
null
terminal_output
8
37,681
utils/dataloader_new.py
0
0
import functools\nimport jax\n\nimport tensorflow as tf\n\n# reserve GPU memory for JAX only if tensorflow is built with GPU support\ntry:\n tf.config.experimental.set_visible_devices([], "GPU")\nexcept tf.errors.NotFoundError:\n pass\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c):\n """\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n "height": tf.io.FixedLenFeature([], tf.int64),\n "width": tf.io.FixedLenFeature([], tf.int64),\n "channels": tf.io.FixedLenFeature([], tf.int64),\n "sequence_length": tf.io.FixedLenFeature([], tf.int64),\n "raw_video": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example["sequence_length"], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example["raw_video"], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef get_dataloader(\n tfrecord_paths: list[str], # List of TFRecord file paths\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 10,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n):\n """\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """\n if not tfrecord_paths:\n raise ValueError("tfrecord_paths list cannot be empty.")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), "Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding."\n per_process_batch_size = global_batch_size // num_processes\n\n # Create a dataset of just the paths (filenames)\n path_dataset = tf.data.Dataset.from_tensor_slices(tfrecord_paths)\n breakpoint()\n\n dataset = tf.data.TFRecordDataset(\n tfrecord_paths, num_parallel_reads=tf.data.AUTOTUNE\n )\n\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n\n # (f.srambical) NOTE: For TFRecords, it's often good to have a large shuffle buffer.\n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()\n
python
tab
9
37,682
utils/dataloader_new.py
3,487
0
null
python
selection_mouse
10
38,347
utils/dataloader_new.py
3,657
0
null
python
selection_mouse
11
39,063
utils/dataloader_new.py
3,526
0
null
python
selection_mouse
12
39,065
utils/dataloader_new.py
3,525
0
null
python
selection_command
13
39,637
utils/dataloader_new.py
3,487
0
null
python
selection_mouse
14
99,817
utils/dataloader_new.py
4,112
0
null
python
selection_mouse
15
100,375
utils/dataloader_new.py
3,544
0
null
python
selection_mouse
16
100,858
utils/dataloader_new.py
3,487
0
null
python
selection_mouse
17
149,529
utils/dataloader_new.py
4,205
0
null
python
selection_mouse
18
149,531
utils/dataloader_new.py
4,204
0
null
python
selection_command
19
150,257
utils/dataloader.py
0
0
import functools\nimport jax\n\nimport tensorflow as tf\n\n# reserve GPU memory for JAX only if tensorflow is built with GPU support\ntry:\n tf.config.experimental.set_visible_devices([], "GPU")\nexcept tf.errors.NotFoundError:\n pass\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c):\n """\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n "height": tf.io.FixedLenFeature([], tf.int64),\n "width": tf.io.FixedLenFeature([], tf.int64),\n "channels": tf.io.FixedLenFeature([], tf.int64),\n "sequence_length": tf.io.FixedLenFeature([], tf.int64),\n "raw_video": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example["sequence_length"], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example["raw_video"], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef get_dataloader(\n tfrecord_paths: list[str], # List of TFRecord file paths\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 10,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n):\n """\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """\n if not tfrecord_paths:\n raise ValueError("tfrecord_paths list cannot be empty.")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), "Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding."\n per_process_batch_size = global_batch_size // num_processes\n\n dataset = tf.data.TFRecordDataset(\n tfrecord_paths, num_parallel_reads=tf.data.AUTOTUNE\n )\n\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n\n # (f.srambical) NOTE: For TFRecords, it's often good to have a large shuffle buffer.\n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()\n
python
tab
20
153,641
TERMINAL
0
0
time squeue --me
null
terminal_command
21
153,696
TERMINAL
0
0
[?25l[?2004l\r]633;E;squeue --me;895d5730-3b47-4a5d-840c-5d137f58d793]633;C[?25h
null
terminal_output
22
220,337
TERMINAL
0
0
time squeue --me
null
terminal_command
23
220,388
TERMINAL
0
0
[?25l[?2004l\r]633;E;squeue --me;895d5730-3b47-4a5d-840c-5d137f58d793]633;C[?25h
null
terminal_output
24
275,710
TERMINAL
0
0
sbatch scripts/train_tokenizer_overfit_sample.sbatch
null
terminal_command
25
275,761
TERMINAL
0
0
[?25l\r]633;A(jafar) ]0;mahajanm@atcremers51: ~/Projects/jafarmahajanm@atcremers51:~/Projects/jafar$ ]633;Bsbatch scripts/train_tokenizer_overfit_sample.sbatch\r\n[?2004l\r]633;E;sbatch scripts/train_tokenizer_overfit_sample.sbatch;895d5730-3b47-4a5d-840c-5d137f58d793]633;C[?25h
null
terminal_output
26
412,543
utils/dataloader.py
0
0
null
python
tab