Delete tagging_dataloader_test.py
tagging_dataloader_test.py  DELETED  (+0 -82)
@@ -1,82 +0,0 @@
# Copyright 2024 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for official.nlp.data.tagging_data_loader."""
import os

from absl.testing import parameterized
import numpy as np
import tensorflow as tf, tf_keras

from official.nlp.data import tagging_dataloader


def _create_fake_dataset(output_path, seq_length, include_sentence_id):
  """Creates a fake dataset."""
  writer = tf.io.TFRecordWriter(output_path)

  def create_int_feature(values):
    f = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values)))
    return f

  for i in range(100):
    features = {}
    input_ids = np.random.randint(100, size=(seq_length))
    features['input_ids'] = create_int_feature(input_ids)
    features['input_mask'] = create_int_feature(np.ones_like(input_ids))
    features['segment_ids'] = create_int_feature(np.ones_like(input_ids))
    features['label_ids'] = create_int_feature(
        np.random.randint(10, size=(seq_length)))
    if include_sentence_id:
      features['sentence_id'] = create_int_feature([i])
      features['sub_sentence_id'] = create_int_feature([0])

    tf_example = tf.train.Example(features=tf.train.Features(feature=features))
    writer.write(tf_example.SerializeToString())
  writer.close()


class TaggingDataLoaderTest(tf.test.TestCase, parameterized.TestCase):

  @parameterized.parameters(True, False)
  def test_load_dataset(self, include_sentence_id):
    seq_length = 16
    batch_size = 10
    train_data_path = os.path.join(self.get_temp_dir(), 'train.tf_record')
    _create_fake_dataset(train_data_path, seq_length, include_sentence_id)
    data_config = tagging_dataloader.TaggingDataConfig(
        input_path=train_data_path,
        seq_length=seq_length,
        global_batch_size=batch_size,
        include_sentence_id=include_sentence_id)

    dataset = tagging_dataloader.TaggingDataLoader(data_config).load()
    features, labels = next(iter(dataset))

    expected_keys = ['input_word_ids', 'input_mask', 'input_type_ids']
    if include_sentence_id:
      expected_keys.extend(['sentence_id', 'sub_sentence_id'])
    self.assertCountEqual(expected_keys, features.keys())

    self.assertEqual(features['input_word_ids'].shape, (batch_size, seq_length))
    self.assertEqual(features['input_mask'].shape, (batch_size, seq_length))
    self.assertEqual(features['input_type_ids'].shape, (batch_size, seq_length))
    self.assertEqual(labels.shape, (batch_size, seq_length))
    if include_sentence_id:
      self.assertEqual(features['sentence_id'].shape, (batch_size,))
      self.assertEqual(features['sub_sentence_id'].shape, (batch_size,))


if __name__ == '__main__':
  tf.test.main()
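For reference, the removed test exercised the loader roughly as follows. This is a minimal sketch, not a replacement for the test: it assumes the `_create_fake_dataset` helper above is in scope and reuses the same illustrative sizes (seq_length=16, batch_size=10).

# Minimal sketch of the deleted test's flow; sizes and paths are illustrative.
import os
import tempfile

from official.nlp.data import tagging_dataloader

data_path = os.path.join(tempfile.mkdtemp(), 'train.tf_record')
_create_fake_dataset(data_path, seq_length=16, include_sentence_id=False)

config = tagging_dataloader.TaggingDataConfig(
    input_path=data_path,
    seq_length=16,
    global_batch_size=10,
    include_sentence_id=False)
dataset = tagging_dataloader.TaggingDataLoader(config).load()
features, labels = next(iter(dataset))
# Per the assertions in the removed test, features contains 'input_word_ids',
# 'input_mask' and 'input_type_ids', each of shape (10, 16); labels is (10, 16).

As a standalone test, the file could be run directly via its `tf.test.main()` entry point, e.g. `python tagging_dataloader_test.py`.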