Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/bigquery_datasink.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/bigquery_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/databricks_uc_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/delta_sharing_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/hudi_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/huggingface_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/iceberg_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/image_datasink.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/image_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/json_datasink.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/json_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/lance_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/mongo_datasink.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/mongo_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/numpy_datasink.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/parquet_bulk_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/sql_datasink.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/text_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/tfrecords_datasink.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/tfrecords_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/torch_datasource.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/webdataset_datasink.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/bigquery_datasource.py +118 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/image_datasource.py +175 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/json_datasource.py +139 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/lance_datasource.py +129 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/webdataset_datasource.py +365 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/__pycache__/optimizers.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/__pycache__/util.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__init__.py +16 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/logical_operator.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/logical_plan.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/operator.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/optimizer.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/physical_plan.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/plan.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/logical_operator.py +79 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/logical_plan.py +31 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/operator.py +58 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/optimizer.py +29 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/physical_plan.py +34 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/operators/__init__.py +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/operators/__pycache__/count_operator.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/operators/__pycache__/input_data_operator.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/operators/__pycache__/n_ary_operator.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/operators/__pycache__/one_to_one_operator.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/operators/__pycache__/read_operator.cpython-310.pyc +0 -0
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (183 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/bigquery_datasink.cpython-310.pyc
ADDED
|
Binary file (4.43 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/bigquery_datasource.cpython-310.pyc
ADDED
|
Binary file (3.79 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/databricks_uc_datasource.cpython-310.pyc
ADDED
|
Binary file (5.63 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/delta_sharing_datasource.cpython-310.pyc
ADDED
|
Binary file (4.46 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/hudi_datasource.cpython-310.pyc
ADDED
|
Binary file (2.86 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/huggingface_datasource.cpython-310.pyc
ADDED
|
Binary file (4.86 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/iceberg_datasource.cpython-310.pyc
ADDED
|
Binary file (7.67 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/image_datasink.cpython-310.pyc
ADDED
|
Binary file (1.22 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/image_datasource.cpython-310.pyc
ADDED
|
Binary file (5.72 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/json_datasink.cpython-310.pyc
ADDED
|
Binary file (1.66 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/json_datasource.cpython-310.pyc
ADDED
|
Binary file (3.95 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/lance_datasource.cpython-310.pyc
ADDED
|
Binary file (4.27 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/mongo_datasink.cpython-310.pyc
ADDED
|
Binary file (1.93 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/mongo_datasource.cpython-310.pyc
ADDED
|
Binary file (4.04 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/numpy_datasink.cpython-310.pyc
ADDED
|
Binary file (1.1 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/parquet_bulk_datasource.cpython-310.pyc
ADDED
|
Binary file (2.06 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/sql_datasink.cpython-310.pyc
ADDED
|
Binary file (1.5 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/text_datasource.cpython-310.pyc
ADDED
|
Binary file (1.57 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/tfrecords_datasink.cpython-310.pyc
ADDED
|
Binary file (5.07 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/tfrecords_datasource.cpython-310.pyc
ADDED
|
Binary file (12 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/torch_datasource.cpython-310.pyc
ADDED
|
Binary file (2.13 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/__pycache__/webdataset_datasink.cpython-310.pyc
ADDED
|
Binary file (1.99 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/bigquery_datasource.py
ADDED
|
@@ -0,0 +1,118 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from typing import List, Optional
|
| 3 |
+
|
| 4 |
+
from ray.data._internal.util import _check_import
|
| 5 |
+
from ray.data.block import Block, BlockMetadata
|
| 6 |
+
from ray.data.datasource.datasource import Datasource, ReadTask
|
| 7 |
+
|
| 8 |
+
logger = logging.getLogger(__name__)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class BigQueryDatasource(Datasource):
|
| 12 |
+
def __init__(
|
| 13 |
+
self,
|
| 14 |
+
project_id: str,
|
| 15 |
+
dataset: Optional[str] = None,
|
| 16 |
+
query: Optional[str] = None,
|
| 17 |
+
):
|
| 18 |
+
_check_import(self, module="google.cloud", package="bigquery")
|
| 19 |
+
_check_import(self, module="google.cloud", package="bigquery_storage")
|
| 20 |
+
_check_import(self, module="google.api_core", package="exceptions")
|
| 21 |
+
|
| 22 |
+
self._project_id = project_id
|
| 23 |
+
self._dataset = dataset
|
| 24 |
+
self._query = query
|
| 25 |
+
|
| 26 |
+
if query is not None and dataset is not None:
|
| 27 |
+
raise ValueError(
|
| 28 |
+
"Query and dataset kwargs cannot both be provided "
|
| 29 |
+
+ "(must be mutually exclusive)."
|
| 30 |
+
)
|
| 31 |
+
|
| 32 |
+
def get_read_tasks(self, parallelism: int) -> List[ReadTask]:
|
| 33 |
+
from google.cloud import bigquery, bigquery_storage
|
| 34 |
+
|
| 35 |
+
def _read_single_partition(stream) -> Block:
|
| 36 |
+
client = bigquery_storage.BigQueryReadClient()
|
| 37 |
+
reader = client.read_rows(stream.name)
|
| 38 |
+
return reader.to_arrow()
|
| 39 |
+
|
| 40 |
+
if self._query:
|
| 41 |
+
query_client = bigquery.Client(project=self._project_id)
|
| 42 |
+
query_job = query_client.query(self._query)
|
| 43 |
+
query_job.result()
|
| 44 |
+
destination = str(query_job.destination)
|
| 45 |
+
dataset_id = destination.split(".")[-2]
|
| 46 |
+
table_id = destination.split(".")[-1]
|
| 47 |
+
else:
|
| 48 |
+
self._validate_dataset_table_exist(self._project_id, self._dataset)
|
| 49 |
+
dataset_id = self._dataset.split(".")[0]
|
| 50 |
+
table_id = self._dataset.split(".")[1]
|
| 51 |
+
|
| 52 |
+
bqs_client = bigquery_storage.BigQueryReadClient()
|
| 53 |
+
table = f"projects/{self._project_id}/datasets/{dataset_id}/tables/{table_id}"
|
| 54 |
+
|
| 55 |
+
if parallelism == -1:
|
| 56 |
+
parallelism = None
|
| 57 |
+
requested_session = bigquery_storage.types.ReadSession(
|
| 58 |
+
table=table,
|
| 59 |
+
data_format=bigquery_storage.types.DataFormat.ARROW,
|
| 60 |
+
)
|
| 61 |
+
read_session = bqs_client.create_read_session(
|
| 62 |
+
parent=f"projects/{self._project_id}",
|
| 63 |
+
read_session=requested_session,
|
| 64 |
+
max_stream_count=parallelism,
|
| 65 |
+
)
|
| 66 |
+
|
| 67 |
+
read_tasks = []
|
| 68 |
+
logger.info("Created streams: " + str(len(read_session.streams)))
|
| 69 |
+
if len(read_session.streams) < parallelism:
|
| 70 |
+
logger.info(
|
| 71 |
+
"The number of streams created by the "
|
| 72 |
+
+ "BigQuery Storage Read API is less than the requested "
|
| 73 |
+
+ "parallelism due to the size of the dataset."
|
| 74 |
+
)
|
| 75 |
+
|
| 76 |
+
for stream in read_session.streams:
|
| 77 |
+
# Create a metadata block object to store schema, etc.
|
| 78 |
+
metadata = BlockMetadata(
|
| 79 |
+
num_rows=None,
|
| 80 |
+
size_bytes=None,
|
| 81 |
+
schema=None,
|
| 82 |
+
input_files=None,
|
| 83 |
+
exec_stats=None,
|
| 84 |
+
)
|
| 85 |
+
|
| 86 |
+
# Create the read task and pass the no-arg wrapper and metadata in
|
| 87 |
+
read_task = ReadTask(
|
| 88 |
+
lambda stream=stream: [_read_single_partition(stream)],
|
| 89 |
+
metadata,
|
| 90 |
+
)
|
| 91 |
+
read_tasks.append(read_task)
|
| 92 |
+
|
| 93 |
+
return read_tasks
|
| 94 |
+
|
| 95 |
+
def estimate_inmemory_data_size(self) -> Optional[int]:
|
| 96 |
+
return None
|
| 97 |
+
|
| 98 |
+
def _validate_dataset_table_exist(self, project_id: str, dataset: str) -> None:
|
| 99 |
+
from google.api_core import exceptions
|
| 100 |
+
from google.cloud import bigquery
|
| 101 |
+
|
| 102 |
+
client = bigquery.Client(project=project_id)
|
| 103 |
+
dataset_id = dataset.split(".")[0]
|
| 104 |
+
try:
|
| 105 |
+
client.get_dataset(dataset_id)
|
| 106 |
+
except exceptions.NotFound:
|
| 107 |
+
raise ValueError(
|
| 108 |
+
"Dataset {} is not found. Please ensure that it exists.".format(
|
| 109 |
+
dataset_id
|
| 110 |
+
)
|
| 111 |
+
)
|
| 112 |
+
|
| 113 |
+
try:
|
| 114 |
+
client.get_table(dataset)
|
| 115 |
+
except exceptions.NotFound:
|
| 116 |
+
raise ValueError(
|
| 117 |
+
"Table {} is not found. Please ensure that it exists.".format(dataset)
|
| 118 |
+
)
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/image_datasource.py
ADDED
|
@@ -0,0 +1,175 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import logging
|
| 3 |
+
import time
|
| 4 |
+
from typing import TYPE_CHECKING, Iterator, List, Optional, Tuple, Union
|
| 5 |
+
|
| 6 |
+
import numpy as np
|
| 7 |
+
|
| 8 |
+
from ray.data._internal.delegating_block_builder import DelegatingBlockBuilder
|
| 9 |
+
from ray.data._internal.util import _check_import
|
| 10 |
+
from ray.data.block import Block, BlockMetadata
|
| 11 |
+
from ray.data.datasource.file_based_datasource import FileBasedDatasource
|
| 12 |
+
from ray.data.datasource.file_meta_provider import DefaultFileMetadataProvider
|
| 13 |
+
|
| 14 |
+
if TYPE_CHECKING:
|
| 15 |
+
import pyarrow
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
logger = logging.getLogger(__name__)
|
| 19 |
+
|
| 20 |
+
# The default size multiplier for reading image data source.
|
| 21 |
+
# This essentially is using image on-disk file size to estimate
|
| 22 |
+
# in-memory data size.
|
| 23 |
+
IMAGE_ENCODING_RATIO_ESTIMATE_DEFAULT = 1
|
| 24 |
+
|
| 25 |
+
# The lower bound value to estimate image encoding ratio.
|
| 26 |
+
IMAGE_ENCODING_RATIO_ESTIMATE_LOWER_BOUND = 0.5
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class ImageDatasource(FileBasedDatasource):
|
| 30 |
+
"""A datasource that lets you read images."""
|
| 31 |
+
|
| 32 |
+
_WRITE_FILE_PER_ROW = True
|
| 33 |
+
_FILE_EXTENSIONS = ["png", "jpg", "jpeg", "tif", "tiff", "bmp", "gif"]
|
| 34 |
+
# Use 8 threads per task to read image files.
|
| 35 |
+
_NUM_THREADS_PER_TASK = 8
|
| 36 |
+
|
| 37 |
+
def __init__(
|
| 38 |
+
self,
|
| 39 |
+
paths: Union[str, List[str]],
|
| 40 |
+
size: Optional[Tuple[int, int]] = None,
|
| 41 |
+
mode: Optional[str] = None,
|
| 42 |
+
**file_based_datasource_kwargs,
|
| 43 |
+
):
|
| 44 |
+
super().__init__(paths, **file_based_datasource_kwargs)
|
| 45 |
+
|
| 46 |
+
_check_import(self, module="PIL", package="Pillow")
|
| 47 |
+
|
| 48 |
+
if size is not None and len(size) != 2:
|
| 49 |
+
raise ValueError(
|
| 50 |
+
"Expected `size` to contain two integers for height and width, "
|
| 51 |
+
f"but got {len(size)} integers instead."
|
| 52 |
+
)
|
| 53 |
+
|
| 54 |
+
if size is not None and (size[0] < 0 or size[1] < 0):
|
| 55 |
+
raise ValueError(
|
| 56 |
+
f"Expected `size` to contain positive integers, but got {size} instead."
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
self.size = size
|
| 60 |
+
self.mode = mode
|
| 61 |
+
|
| 62 |
+
meta_provider = file_based_datasource_kwargs.get("meta_provider", None)
|
| 63 |
+
if isinstance(meta_provider, ImageFileMetadataProvider):
|
| 64 |
+
self._encoding_ratio = self._estimate_files_encoding_ratio()
|
| 65 |
+
meta_provider._set_encoding_ratio(self._encoding_ratio)
|
| 66 |
+
else:
|
| 67 |
+
self._encoding_ratio = IMAGE_ENCODING_RATIO_ESTIMATE_DEFAULT
|
| 68 |
+
|
| 69 |
+
def _read_stream(
|
| 70 |
+
self,
|
| 71 |
+
f: "pyarrow.NativeFile",
|
| 72 |
+
path: str,
|
| 73 |
+
) -> Iterator[Block]:
|
| 74 |
+
from PIL import Image, UnidentifiedImageError
|
| 75 |
+
|
| 76 |
+
data = f.readall()
|
| 77 |
+
|
| 78 |
+
try:
|
| 79 |
+
image = Image.open(io.BytesIO(data))
|
| 80 |
+
except UnidentifiedImageError as e:
|
| 81 |
+
raise ValueError(f"PIL couldn't load image file at path '{path}'.") from e
|
| 82 |
+
|
| 83 |
+
if self.size is not None:
|
| 84 |
+
height, width = self.size
|
| 85 |
+
image = image.resize((width, height), resample=Image.BILINEAR)
|
| 86 |
+
if self.mode is not None:
|
| 87 |
+
image = image.convert(self.mode)
|
| 88 |
+
|
| 89 |
+
builder = DelegatingBlockBuilder()
|
| 90 |
+
array = np.array(image)
|
| 91 |
+
item = {"image": array}
|
| 92 |
+
builder.add(item)
|
| 93 |
+
block = builder.build()
|
| 94 |
+
|
| 95 |
+
yield block
|
| 96 |
+
|
| 97 |
+
def _rows_per_file(self):
|
| 98 |
+
return 1
|
| 99 |
+
|
| 100 |
+
def estimate_inmemory_data_size(self) -> Optional[int]:
|
| 101 |
+
total_size = 0
|
| 102 |
+
for file_size in self._file_sizes():
|
| 103 |
+
# NOTE: check if file size is not None, because some metadata provider
|
| 104 |
+
# such as FastFileMetadataProvider does not provide file size information.
|
| 105 |
+
if file_size is not None:
|
| 106 |
+
total_size += file_size
|
| 107 |
+
return total_size * self._encoding_ratio
|
| 108 |
+
|
| 109 |
+
def _estimate_files_encoding_ratio(self) -> float:
|
| 110 |
+
"""Return an estimate of the image files encoding ratio."""
|
| 111 |
+
start_time = time.perf_counter()
|
| 112 |
+
# Filter out empty file to avoid noise.
|
| 113 |
+
non_empty_path_and_size = list(
|
| 114 |
+
filter(lambda p: p[1] > 0, zip(self._paths(), self._file_sizes()))
|
| 115 |
+
)
|
| 116 |
+
num_files = len(non_empty_path_and_size)
|
| 117 |
+
if num_files == 0:
|
| 118 |
+
logger.warn(
|
| 119 |
+
"All input image files are empty. "
|
| 120 |
+
"Use on-disk file size to estimate images in-memory size."
|
| 121 |
+
)
|
| 122 |
+
return IMAGE_ENCODING_RATIO_ESTIMATE_DEFAULT
|
| 123 |
+
|
| 124 |
+
if self.size is not None and self.mode is not None:
|
| 125 |
+
# Use image size and mode to calculate data size for all images,
|
| 126 |
+
# because all images are homogeneous with same size after resizing.
|
| 127 |
+
# Resizing is enforced when reading every image in `ImageDatasource`
|
| 128 |
+
# when `size` argument is provided.
|
| 129 |
+
if self.mode in ["1", "L", "P"]:
|
| 130 |
+
dimension = 1
|
| 131 |
+
elif self.mode in ["RGB", "YCbCr", "LAB", "HSV"]:
|
| 132 |
+
dimension = 3
|
| 133 |
+
elif self.mode in ["RGBA", "CMYK", "I", "F"]:
|
| 134 |
+
dimension = 4
|
| 135 |
+
else:
|
| 136 |
+
logger.warn(f"Found unknown image mode: {self.mode}.")
|
| 137 |
+
return IMAGE_ENCODING_RATIO_ESTIMATE_DEFAULT
|
| 138 |
+
height, width = self.size
|
| 139 |
+
single_image_size = height * width * dimension
|
| 140 |
+
total_estimated_size = single_image_size * num_files
|
| 141 |
+
total_file_size = sum(p[1] for p in non_empty_path_and_size)
|
| 142 |
+
ratio = total_estimated_size / total_file_size
|
| 143 |
+
else:
|
| 144 |
+
# TODO(chengsu): sample images to estimate data size
|
| 145 |
+
ratio = IMAGE_ENCODING_RATIO_ESTIMATE_DEFAULT
|
| 146 |
+
|
| 147 |
+
sampling_duration = time.perf_counter() - start_time
|
| 148 |
+
if sampling_duration > 5:
|
| 149 |
+
logger.warn(
|
| 150 |
+
"Image input size estimation took "
|
| 151 |
+
f"{round(sampling_duration, 2)} seconds."
|
| 152 |
+
)
|
| 153 |
+
logger.debug(f"Estimated image encoding ratio from sampling is {ratio}.")
|
| 154 |
+
return max(ratio, IMAGE_ENCODING_RATIO_ESTIMATE_LOWER_BOUND)
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
class ImageFileMetadataProvider(DefaultFileMetadataProvider):
|
| 158 |
+
def _set_encoding_ratio(self, encoding_ratio: int):
|
| 159 |
+
"""Set image file encoding ratio, to provide accurate size in bytes metadata."""
|
| 160 |
+
self._encoding_ratio = encoding_ratio
|
| 161 |
+
|
| 162 |
+
def _get_block_metadata(
|
| 163 |
+
self,
|
| 164 |
+
paths: List[str],
|
| 165 |
+
schema: Optional[Union[type, "pyarrow.lib.Schema"]],
|
| 166 |
+
*,
|
| 167 |
+
rows_per_file: Optional[int],
|
| 168 |
+
file_sizes: List[Optional[int]],
|
| 169 |
+
) -> BlockMetadata:
|
| 170 |
+
metadata = super()._get_block_metadata(
|
| 171 |
+
paths, schema, rows_per_file=rows_per_file, file_sizes=file_sizes
|
| 172 |
+
)
|
| 173 |
+
if metadata.size_bytes is not None:
|
| 174 |
+
metadata.size_bytes = int(metadata.size_bytes * self._encoding_ratio)
|
| 175 |
+
return metadata
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/json_datasource.py
ADDED
|
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from io import BytesIO
|
| 3 |
+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
|
| 4 |
+
|
| 5 |
+
from ray.air.util.tensor_extensions.arrow import pyarrow_table_from_pydict
|
| 6 |
+
from ray.data.context import DataContext
|
| 7 |
+
from ray.data.datasource.file_based_datasource import FileBasedDatasource
|
| 8 |
+
|
| 9 |
+
if TYPE_CHECKING:
|
| 10 |
+
import pyarrow
|
| 11 |
+
|
| 12 |
+
logger = logging.getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class JSONDatasource(FileBasedDatasource):
|
| 16 |
+
"""JSON datasource, for reading and writing JSON and JSONL files."""
|
| 17 |
+
|
| 18 |
+
_FILE_EXTENSIONS = ["json", "jsonl"]
|
| 19 |
+
|
| 20 |
+
def __init__(
|
| 21 |
+
self,
|
| 22 |
+
paths: Union[str, List[str]],
|
| 23 |
+
*,
|
| 24 |
+
arrow_json_args: Optional[Dict[str, Any]] = None,
|
| 25 |
+
**file_based_datasource_kwargs,
|
| 26 |
+
):
|
| 27 |
+
from pyarrow import json
|
| 28 |
+
|
| 29 |
+
super().__init__(paths, **file_based_datasource_kwargs)
|
| 30 |
+
|
| 31 |
+
if arrow_json_args is None:
|
| 32 |
+
arrow_json_args = {}
|
| 33 |
+
|
| 34 |
+
self.read_options = arrow_json_args.pop(
|
| 35 |
+
"read_options", json.ReadOptions(use_threads=False)
|
| 36 |
+
)
|
| 37 |
+
self.arrow_json_args = arrow_json_args
|
| 38 |
+
|
| 39 |
+
def _read_with_pyarrow_read_json(self, buffer: "pyarrow.lib.Buffer"):
|
| 40 |
+
"""Read with PyArrow JSON reader, trying to auto-increase the
|
| 41 |
+
read block size in the case of the read object
|
| 42 |
+
straddling block boundaries."""
|
| 43 |
+
import pyarrow as pa
|
| 44 |
+
|
| 45 |
+
# When reading large files, the default block size configured in PyArrow can be
|
| 46 |
+
# too small, resulting in the following error: `pyarrow.lib.ArrowInvalid:
|
| 47 |
+
# straddling object straddles two block boundaries (try to increase block
|
| 48 |
+
# size?)`. More information on this issue can be found here:
|
| 49 |
+
# https://github.com/apache/arrow/issues/25674
|
| 50 |
+
# The read will be retried with geometrically increasing block size
|
| 51 |
+
# until the size reaches `DataContext.get_current().target_max_block_size`.
|
| 52 |
+
# The initial block size will start at the PyArrow default block size
|
| 53 |
+
# or it can be manually set through the `read_options` parameter as follows.
|
| 54 |
+
# >>> import pyarrow.json as pajson
|
| 55 |
+
# >>> block_size = 10 << 20 # Set block size to 10MB
|
| 56 |
+
# >>> ray.data.read_json( # doctest: +SKIP
|
| 57 |
+
# ... "s3://anonymous@ray-example-data/log.json",
|
| 58 |
+
# ... read_options=pajson.ReadOptions(block_size=block_size)
|
| 59 |
+
# ... )
|
| 60 |
+
|
| 61 |
+
init_block_size = self.read_options.block_size
|
| 62 |
+
max_block_size = DataContext.get_current().target_max_block_size
|
| 63 |
+
while True:
|
| 64 |
+
try:
|
| 65 |
+
yield pa.json.read_json(
|
| 66 |
+
BytesIO(buffer),
|
| 67 |
+
read_options=self.read_options,
|
| 68 |
+
**self.arrow_json_args,
|
| 69 |
+
)
|
| 70 |
+
self.read_options.block_size = init_block_size
|
| 71 |
+
break
|
| 72 |
+
except pa.ArrowInvalid as e:
|
| 73 |
+
if "straddling object straddles two block boundaries" in str(e):
|
| 74 |
+
if self.read_options.block_size < max_block_size:
|
| 75 |
+
# Increase the block size in case it was too small.
|
| 76 |
+
logger.debug(
|
| 77 |
+
f"JSONDatasource read failed with "
|
| 78 |
+
f"block_size={self.read_options.block_size}. Retrying with "
|
| 79 |
+
f"block_size={self.read_options.block_size * 2}."
|
| 80 |
+
)
|
| 81 |
+
self.read_options.block_size *= 2
|
| 82 |
+
else:
|
| 83 |
+
raise pa.ArrowInvalid(
|
| 84 |
+
f"{e} - Auto-increasing block size to "
|
| 85 |
+
f"{self.read_options.block_size} bytes failed. "
|
| 86 |
+
f"Please try manually increasing the block size through "
|
| 87 |
+
f"the `read_options` parameter to a larger size. "
|
| 88 |
+
f"For example: `read_json(..., read_options="
|
| 89 |
+
f"pyarrow.json.ReadOptions(block_size=10 << 25))`"
|
| 90 |
+
f"More information on this issue can be found here: "
|
| 91 |
+
f"https://github.com/apache/arrow/issues/25674"
|
| 92 |
+
)
|
| 93 |
+
else:
|
| 94 |
+
# unrelated error, simply reraise
|
| 95 |
+
raise e
|
| 96 |
+
|
| 97 |
+
def _read_with_python_json(self, buffer: "pyarrow.lib.Buffer"):
|
| 98 |
+
"""Fallback method to read JSON files with Python's native json.load(),
|
| 99 |
+
in case the default pyarrow json reader fails."""
|
| 100 |
+
import json
|
| 101 |
+
|
| 102 |
+
import pyarrow as pa
|
| 103 |
+
|
| 104 |
+
# Check if the buffer is empty
|
| 105 |
+
if buffer.size == 0:
|
| 106 |
+
return
|
| 107 |
+
|
| 108 |
+
parsed_json = json.load(BytesIO(buffer))
|
| 109 |
+
try:
|
| 110 |
+
yield pa.Table.from_pylist(parsed_json)
|
| 111 |
+
except AttributeError as e:
|
| 112 |
+
# For PyArrow < 7.0.0, `pa.Table.from_pylist()` is not available.
|
| 113 |
+
# Construct a dict from the list and call
|
| 114 |
+
# `pa.Table.from_pydict()` instead.
|
| 115 |
+
assert "no attribute 'from_pylist'" in str(e), str(e)
|
| 116 |
+
from collections import defaultdict
|
| 117 |
+
|
| 118 |
+
dct = defaultdict(list)
|
| 119 |
+
for row in parsed_json:
|
| 120 |
+
for k, v in row.items():
|
| 121 |
+
dct[k].append(v)
|
| 122 |
+
yield pyarrow_table_from_pydict(dct)
|
| 123 |
+
|
| 124 |
+
# TODO(ekl) The PyArrow JSON reader doesn't support streaming reads.
|
| 125 |
+
def _read_stream(self, f: "pyarrow.NativeFile", path: str):
|
| 126 |
+
import pyarrow as pa
|
| 127 |
+
|
| 128 |
+
buffer: pa.lib.Buffer = f.read_buffer()
|
| 129 |
+
|
| 130 |
+
try:
|
| 131 |
+
yield from self._read_with_pyarrow_read_json(buffer)
|
| 132 |
+
except pa.ArrowInvalid as e:
|
| 133 |
+
# If read with PyArrow fails, try falling back to native json.load().
|
| 134 |
+
logger.warning(
|
| 135 |
+
f"Error reading with pyarrow.json.read_json(). "
|
| 136 |
+
f"Falling back to native json.load(), which may be slower. "
|
| 137 |
+
f"PyArrow error was:\n{e}"
|
| 138 |
+
)
|
| 139 |
+
yield from self._read_with_python_json(buffer)
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/lance_datasource.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional
|
| 3 |
+
|
| 4 |
+
import numpy as np
|
| 5 |
+
|
| 6 |
+
from ray.data._internal.util import _check_import, call_with_retry
|
| 7 |
+
from ray.data.block import BlockMetadata
|
| 8 |
+
from ray.data.context import DataContext
|
| 9 |
+
from ray.data.datasource.datasource import Datasource, ReadTask
|
| 10 |
+
|
| 11 |
+
if TYPE_CHECKING:
|
| 12 |
+
import pyarrow
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
logger = logging.getLogger(__name__)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class LanceDatasource(Datasource):
    """Lance datasource, for reading Lance dataset."""

    # Errors to retry when reading Lance fragments.
    READ_FRAGMENTS_ERRORS_TO_RETRY = ["LanceError(IO)"]
    # Maximum number of attempts to read Lance fragments.
    READ_FRAGMENTS_MAX_ATTEMPTS = 10
    # Maximum backoff seconds between attempts to read Lance fragments.
    READ_FRAGMENTS_RETRY_MAX_BACKOFF_SECONDS = 32

    def __init__(
        self,
        uri: str,
        columns: Optional[List[str]] = None,
        filter: Optional[str] = None,
        storage_options: Optional[Dict[str, str]] = None,
        scanner_options: Optional[Dict[str, Any]] = None,
    ):
        """Create a datasource over the Lance dataset at ``uri``.

        Args:
            uri: Location of the Lance dataset.
            columns: Optional column projection; stored into
                ``scanner_options["columns"]``.
            filter: Optional filter expression; stored into
                ``scanner_options["filter"]``.
            storage_options: Options forwarded to ``lance.dataset``.
            scanner_options: Extra options forwarded to the Lance scanner.
        """
        _check_import(self, module="lance", package="pylance")

        import lance

        self.uri = uri
        self.scanner_options = scanner_options or {}
        if columns is not None:
            self.scanner_options["columns"] = columns
        if filter is not None:
            self.scanner_options["filter"] = filter
        self.storage_options = storage_options
        self.lance_ds = lance.dataset(uri=uri, storage_options=storage_options)

        # Retry both Lance IO errors and the errors Ray Data already treats
        # as transient IO failures.
        match = []
        match.extend(self.READ_FRAGMENTS_ERRORS_TO_RETRY)
        match.extend(DataContext.get_current().retried_io_errors)
        self._retry_params = {
            "description": "read lance fragments",
            "match": match,
            "max_attempts": self.READ_FRAGMENTS_MAX_ATTEMPTS,
            "max_backoff_s": self.READ_FRAGMENTS_RETRY_MAX_BACKOFF_SECONDS,
        }

    def get_read_tasks(self, parallelism: int) -> List[ReadTask]:
        """Split the dataset's fragments into up to ``parallelism`` read tasks."""
        read_tasks = []
        for fragments in np.array_split(self.lance_ds.get_fragments(), parallelism):
            # np.array_split may produce empty chunks when there are fewer
            # fragments than requested parallelism.
            if len(fragments) <= 0:
                continue

            fragment_ids = [f.metadata.id for f in fragments]
            num_rows = sum(f.count_rows() for f in fragments)
            input_files = [
                data_file.path() for f in fragments for data_file in f.data_files()
            ]

            # TODO(chengsu): Take column projection into consideration for schema.
            metadata = BlockMetadata(
                num_rows=num_rows,
                schema=fragments[0].schema,
                input_files=input_files,
                size_bytes=None,
                exec_stats=None,
            )
            scanner_options = self.scanner_options
            lance_ds = self.lance_ds
            retry_params = self._retry_params

            # Bind fragment_ids as a default argument so each lambda captures
            # its own fragment list rather than the loop variable.
            read_task = ReadTask(
                lambda f=fragment_ids: _read_fragments_with_retry(
                    f,
                    lance_ds,
                    scanner_options,
                    retry_params,
                ),
                metadata,
            )
            read_tasks.append(read_task)

        return read_tasks

    def estimate_inmemory_data_size(self) -> Optional[int]:
        # TODO(chengsu): Add memory size estimation to improve auto-tune of parallelism.
        return None
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def _read_fragments_with_retry(
    fragment_ids,
    lance_ds,
    scanner_options,
    retry_params,
) -> Iterator["pyarrow.Table"]:
    """Read Lance fragments, retrying transient failures per ``retry_params``."""

    def _do_read():
        return _read_fragments(fragment_ids, lance_ds, scanner_options)

    return call_with_retry(_do_read, **retry_params)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def _read_fragments(
    fragment_ids,
    lance_ds,
    scanner_options,
) -> Iterator["pyarrow.Table"]:
    """Read Lance fragments in batches.

    NOTE: Use fragment ids, instead of fragments as parameter, because pickling
    LanceFragment is expensive.

    Args:
        fragment_ids: Ids of the fragments to read from ``lance_ds``.
        lance_ds: The Lance dataset to read.
        scanner_options: Options forwarded to ``lance_ds.scanner``; not
            mutated by this function.

    Yields:
        One single-batch ``pyarrow.Table`` per record batch scanned.
    """
    import pyarrow

    fragments = [lance_ds.get_fragment(id) for id in fragment_ids]
    # Copy before adding the per-call "fragments" entry: the dict passed in is
    # the datasource's shared `scanner_options`, captured by every read task,
    # so mutating it in place would leak one task's fragment list to others.
    scanner_options = {**scanner_options, "fragments": fragments}
    scanner = lance_ds.scanner(**scanner_options)
    for batch in scanner.to_reader():
        yield pyarrow.Table.from_batches([batch])
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/datasource/webdataset_datasource.py
ADDED
|
@@ -0,0 +1,365 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright NVIDIA Corporation 2023
|
| 2 |
+
# SPDX-License-Identifier: Apache-2.0
|
| 3 |
+
|
| 4 |
+
import fnmatch
|
| 5 |
+
import io
|
| 6 |
+
import re
|
| 7 |
+
import tarfile
|
| 8 |
+
from functools import partial
|
| 9 |
+
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union
|
| 10 |
+
|
| 11 |
+
import ray
|
| 12 |
+
from ray.data._internal.util import iterate_with_retry
|
| 13 |
+
from ray.data.block import BlockAccessor
|
| 14 |
+
from ray.data.datasource.file_based_datasource import FileBasedDatasource
|
| 15 |
+
|
| 16 |
+
if TYPE_CHECKING:
|
| 17 |
+
import pyarrow
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def _base_plus_ext(path: str):
|
| 21 |
+
"""Split off all file extensions.
|
| 22 |
+
|
| 23 |
+
Returns base, allext.
|
| 24 |
+
|
| 25 |
+
Args:
|
| 26 |
+
path: path with extensions
|
| 27 |
+
|
| 28 |
+
Returns:
|
| 29 |
+
str: path with all extensions removed
|
| 30 |
+
"""
|
| 31 |
+
match = re.match(r"^((?:.*/|)[^.]+)[.]([^/]*)$", path)
|
| 32 |
+
if not match:
|
| 33 |
+
return None, None
|
| 34 |
+
return match.group(1), match.group(2)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def _valid_sample(sample: Dict[str, Any]):
|
| 38 |
+
"""Check whether a sample is valid.
|
| 39 |
+
|
| 40 |
+
Args:
|
| 41 |
+
sample: sample to be checked
|
| 42 |
+
"""
|
| 43 |
+
return (
|
| 44 |
+
sample is not None
|
| 45 |
+
and isinstance(sample, dict)
|
| 46 |
+
and len(list(sample.keys())) > 0
|
| 47 |
+
and not sample.get("__bad__", False)
|
| 48 |
+
)
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def _apply_list(
|
| 52 |
+
f: Union[Callable, List[Callable]], sample: Dict[str, Any], default: Callable = None
|
| 53 |
+
):
|
| 54 |
+
"""Apply a list of functions to a sample.
|
| 55 |
+
|
| 56 |
+
Args:
|
| 57 |
+
f: function or list of functions
|
| 58 |
+
sample: sample to be modified
|
| 59 |
+
default: default function to be applied to all keys.
|
| 60 |
+
Defaults to None.
|
| 61 |
+
|
| 62 |
+
Returns:
|
| 63 |
+
modified sample
|
| 64 |
+
"""
|
| 65 |
+
if f is None:
|
| 66 |
+
return sample
|
| 67 |
+
if not isinstance(f, list):
|
| 68 |
+
f = [f]
|
| 69 |
+
for g in f:
|
| 70 |
+
if default is not None and not callable(g):
|
| 71 |
+
g = partial(default, format=g)
|
| 72 |
+
sample = g(sample)
|
| 73 |
+
return sample
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def _check_suffix(suffix: str, suffixes: Union[list, callable]):
|
| 77 |
+
"""Check whether a suffix is valid.
|
| 78 |
+
|
| 79 |
+
Suffixes can be either None (=accept everything), a callable,
|
| 80 |
+
or a list of patterns. If the pattern contains */? it is treated
|
| 81 |
+
as a glob pattern, otherwise it is treated as a literal.
|
| 82 |
+
|
| 83 |
+
Args:
|
| 84 |
+
suffix: suffix to be checked
|
| 85 |
+
suffixes: list of valid suffixes
|
| 86 |
+
"""
|
| 87 |
+
if suffixes is None:
|
| 88 |
+
return True
|
| 89 |
+
if callable(suffixes):
|
| 90 |
+
return suffixes(suffix)
|
| 91 |
+
for pattern in suffixes:
|
| 92 |
+
if "*" in pattern or "?" in pattern:
|
| 93 |
+
if fnmatch.fnmatch("." + suffix, pattern):
|
| 94 |
+
return True
|
| 95 |
+
elif suffix == pattern or "." + suffix == pattern:
|
| 96 |
+
return True
|
| 97 |
+
return False
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def _tar_file_iterator(
    fileobj: Any,
    fileselect: Optional[Union[bool, callable, list]] = None,
    filerename: Optional[Union[bool, callable, list]] = None,
    verbose_open: bool = False,
    meta: dict = None,
):
    """Iterate over a tar stream, yielding ``{"fname", "data"}`` dicts.

    Args:
        fileobj: file object containing the tar archive
        fileselect: patterns or function selecting files to be selected
        filerename: function(s) applied to each file name before selection
        verbose_open: print a message when the stream starts/ends
        meta: metadata included in the start/end messages
    """
    meta = meta or {}
    # "r|*" = non-seekable streaming mode with transparent compression.
    tar = tarfile.open(fileobj=fileobj, mode="r|*")
    if verbose_open:
        print(f"start {meta}")
    for member in tar:
        name = member.name
        # Skip directories, links, and anonymous members.
        if name is None or not member.isreg():
            continue
        payload = tar.extractfile(member).read()
        name = _apply_list(filerename, name)
        assert isinstance(name, str)
        if not _check_suffix(name, fileselect):
            continue
        yield {"fname": name, "data": payload}
    if verbose_open:
        print(f"done {meta}")
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
def _group_by_keys(
    data: List[Dict[str, Any]],
    keys: callable = _base_plus_ext,
    suffixes: Optional[Union[list, callable]] = None,
    meta: dict = None,
):
    """Group consecutive (fname, data) file entries into sample dicts.

    Files whose names share the same prefix (as computed by ``keys``) are
    merged into one sample keyed by suffix. Grouping assumes the input is
    ordered so that files of a sample are adjacent, as in a tar stream.

    Args:
        data: iterator over key, value pairs
        keys: function that returns key, suffix for a given key
        suffixes: list of suffixes to be included in the sample
        meta: metadata to be added to each sample
    """
    meta = meta or {}
    current_sample = None
    for filesample in data:
        assert isinstance(filesample, dict)
        fname, value = filesample["fname"], filesample["data"]
        prefix, suffix = keys(fname)
        # Files that don't match the key pattern are skipped entirely.
        if prefix is None:
            continue
        if current_sample is None or prefix != current_sample["__key__"]:
            # A new prefix starts a new sample: flush the previous one
            # first (with metadata merged in) if it is valid.
            if _valid_sample(current_sample):
                current_sample.update(meta)
                yield current_sample
            current_sample = dict(__key__=prefix)
            # NOTE(review): "__url__" is only present if the upstream
            # iterator attaches it; _tar_file_iterator alone does not.
            if "__url__" in filesample:
                current_sample["__url__"] = filesample["__url__"]
        # Two files mapping to the same (prefix, suffix) is ambiguous.
        if suffix in current_sample:
            raise ValueError(
                f"{fname}: duplicate file name in tar file "
                + f"{suffix} {current_sample.keys()}"
            )
        if suffixes is None or _check_suffix(suffix, suffixes):
            current_sample[suffix] = value
    # Flush the trailing sample.
    if _valid_sample(current_sample):
        current_sample.update(meta)
        yield current_sample
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def _default_decoder(sample: Dict[str, Any], format: Optional[Union[bool, str]] = True):
    """A default decoder for webdataset.

    This handles common file extensions: .txt, .cls, .cls2,
    .jpg, .png, .json, .npy, .mp, .pt, .pth, .pickle, .pkl.
    These are the most common extensions used in webdataset.
    For other extensions, users can provide their own decoder.

    Args:
        sample: sample to decode; a shallow copy is returned, the input
            dict itself is not modified
        format: for image extensions, ``"PIL"`` keeps ``PIL.Image`` objects;
            any other value converts them to numpy arrays
    """
    sample = dict(sample)
    for key, value in sample.items():
        extension = key.split(".")[-1]
        if key.startswith("__"):
            # Metadata entries such as __key__/__url__ pass through as-is.
            continue
        elif extension in ["txt", "text"]:
            sample[key] = value.decode("utf-8")
        elif extension in ["cls", "cls2"]:
            # Class labels are stored as ASCII integers.
            sample[key] = int(value.decode("utf-8"))
        elif extension in ["jpg", "png", "ppm", "pgm", "pbm", "pnm"]:
            import numpy as np
            import PIL.Image

            if format == "PIL":
                sample[key] = PIL.Image.open(io.BytesIO(value))
            else:
                sample[key] = np.asarray(PIL.Image.open(io.BytesIO(value)))
        elif extension == "json":
            import json

            sample[key] = json.loads(value)
        elif extension == "npy":
            import numpy as np

            sample[key] = np.load(io.BytesIO(value))
        elif extension == "mp":
            import msgpack

            sample[key] = msgpack.unpackb(value, raw=False)
        elif extension in ["pt", "pth"]:
            import torch

            sample[key] = torch.load(io.BytesIO(value))
        elif extension in ["pickle", "pkl"]:
            import pickle

            # NOTE(review): unpickling can execute arbitrary code; only use
            # this decoder on trusted archives.
            sample[key] = pickle.loads(value)
    return sample
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
# Map file extensions to the format name passed to image saving in
# _default_encoder (the format name for ".jpg" files is spelled "jpeg");
# extensions not listed here are used as the format name directly.
extension_to_format = {"jpg": "jpeg"}
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
def _default_encoder(sample: Dict[str, Any], format: Optional[Union[str, bool]] = True):
    """A default encoder for webdataset.

    This handles common file extensions: .txt, .cls, .cls2, .jpg,
    .png, .json, .npy, .mp, .pt, .pth, .pickle, .pkl
    These are the most common extensions used in webdataset.
    For other extensions, users can provide their own encoder.

    Args:
        sample: sample to encode; a shallow copy is returned, the input
            dict itself is not modified
        format: currently unused here; kept for signature symmetry with
            ``_default_decoder``
    """
    sample = dict(sample)
    for key, value in sample.items():
        extension = key.split(".")[-1]
        if key.startswith("__"):
            # Metadata entries such as __key__/__url__ pass through as-is.
            continue
        elif extension in ["txt"]:
            sample[key] = value.encode("utf-8")
        elif extension in ["cls", "cls2"]:
            # Class labels are serialized as ASCII integers.
            sample[key] = str(value).encode("utf-8")
        elif extension in ["jpg", "jpeg", "png", "ppm", "pgm", "pbm", "pnm"]:
            import numpy as np
            import PIL.Image

            # Arrays are converted to PIL images before encoding.
            if isinstance(value, np.ndarray):
                value = PIL.Image.fromarray(value)
            assert isinstance(value, PIL.Image.Image)
            stream = io.BytesIO()
            value.save(
                stream, format=extension_to_format.get(extension.lower(), extension)
            )
            sample[key] = stream.getvalue()
        elif extension == "json":
            import json

            sample[key] = json.dumps(value).encode("utf-8")
        elif extension == "npy":
            import numpy as np

            stream = io.BytesIO()
            np.save(stream, value)
            sample[key] = stream.getvalue()
        elif extension == "mp":
            import msgpack

            sample[key] = msgpack.dumps(value)
        elif extension in ["pt", "pth"]:
            import torch

            stream = io.BytesIO()
            torch.save(value, stream)
            sample[key] = stream.getvalue()
        elif extension in ["pickle", "pkl"]:
            import pickle

            stream = io.BytesIO()
            pickle.dump(value, stream)
            sample[key] = stream.getvalue()
    return sample
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
def _make_iterable(block: BlockAccessor):
    """Make a block iterable.

    This is a placeholder for dealing with more complex blocks.

    Args:
        block: Ray Dataset block

    Returns:
        Iterable[Dict[str,Any]]: iterator over the block's rows in the
        internal (non-public) row format.
    """
    return block.iter_rows(public_row_format=False)
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
class WebDatasetDatasource(FileBasedDatasource):
    """A Datasource for WebDataset datasets (tar format with naming conventions)."""

    _FILE_EXTENSIONS = ["tar"]

    def __init__(
        self,
        paths: Union[str, List[str]],
        decoder: Optional[Union[bool, str, callable, list]] = True,
        fileselect: Optional[Union[bool, callable, list]] = None,
        filerename: Optional[Union[bool, callable, list]] = None,
        suffixes: Optional[Union[bool, callable, list]] = None,
        verbose_open: bool = False,
        **file_based_datasource_kwargs,
    ):
        """Create the datasource.

        Args:
            paths: Path(s) to the ``.tar`` shards.
            decoder: Decoder(s) applied to each grouped sample; non-callable
                entries are passed as ``format`` to ``_default_decoder``.
            fileselect: Patterns/predicate selecting files while reading tars.
            filerename: Function(s) renaming files before selection/grouping.
            suffixes: Patterns/predicate selecting suffixes during grouping.
            verbose_open: Print a message when opening each shard.
        """
        super().__init__(paths, **file_based_datasource_kwargs)

        self.decoder = decoder
        self.fileselect = fileselect
        self.filerename = filerename
        self.suffixes = suffixes
        self.verbose_open = verbose_open

    def _read_stream(self, stream: "pyarrow.NativeFile", path: str):
        """Read and decode samples from a stream.

        Note that ``fileselect`` selects files during reading, while
        ``suffixes`` selects files during the grouping step.

        Args:
            stream: File descriptor to read from.
            path: Path to the data; attached to each sample as ``__url__``.

        Yields:
            One single-row ``pandas.DataFrame`` per decoded sample.
        """
        import pandas as pd

        def get_tar_file_iterator():
            return _tar_file_iterator(
                stream,
                fileselect=self.fileselect,
                filerename=self.filerename,
                verbose_open=self.verbose_open,
            )

        # S3 can raise transient errors during iteration
        ctx = ray.data.DataContext.get_current()
        files = iterate_with_retry(
            get_tar_file_iterator, "iterate tar file", match=ctx.retried_io_errors
        )

        samples = _group_by_keys(files, meta=dict(__url__=path), suffixes=self.suffixes)
        for sample in samples:
            if self.decoder is not None:
                sample = _apply_list(self.decoder, sample, default=_default_decoder)
            # Each sample becomes a one-row DataFrame block.
            yield pd.DataFrame({k: [v] for k, v in sample.items()})
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (180 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/__pycache__/optimizers.cpython-310.pyc
ADDED
|
Binary file (3.35 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/__pycache__/util.cpython-310.pyc
ADDED
|
Binary file (2.39 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__init__.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .logical_operator import LogicalOperator
|
| 2 |
+
from .logical_plan import LogicalPlan
|
| 3 |
+
from .operator import Operator
|
| 4 |
+
from .optimizer import Optimizer, Rule
|
| 5 |
+
from .physical_plan import PhysicalPlan
|
| 6 |
+
from .plan import Plan
|
| 7 |
+
|
| 8 |
+
__all__ = [
|
| 9 |
+
"LogicalOperator",
|
| 10 |
+
"LogicalPlan",
|
| 11 |
+
"Operator",
|
| 12 |
+
"Optimizer",
|
| 13 |
+
"PhysicalPlan",
|
| 14 |
+
"Plan",
|
| 15 |
+
"Rule",
|
| 16 |
+
]
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (533 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/logical_operator.cpython-310.pyc
ADDED
|
Binary file (3.55 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/logical_plan.cpython-310.pyc
ADDED
|
Binary file (1.35 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/operator.cpython-310.pyc
ADDED
|
Binary file (2.3 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/optimizer.cpython-310.pyc
ADDED
|
Binary file (1.37 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/physical_plan.cpython-310.pyc
ADDED
|
Binary file (1.42 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/__pycache__/plan.cpython-310.pyc
ADDED
|
Binary file (1.14 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/logical_operator.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import TYPE_CHECKING, Iterator, List, Optional
|
| 2 |
+
|
| 3 |
+
from .operator import Operator
|
| 4 |
+
from ray.data.block import BlockMetadata
|
| 5 |
+
|
| 6 |
+
if TYPE_CHECKING:
|
| 7 |
+
from ray.data._internal.execution.interfaces import RefBundle
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class LogicalOperator(Operator):
    """Abstract class for logical operators.

    A logical operator describes transformation, and later is converted into
    physical operator.
    """

    def __init__(
        self,
        name: str,
        input_dependencies: List["LogicalOperator"],
        num_outputs: Optional[int] = None,
    ):
        """Create a logical operator.

        Args:
            name: Human-readable operator name.
            input_dependencies: Upstream logical operators feeding this one.
            num_outputs: Known number of output blocks, if any.
        """
        super().__init__(
            name,
            input_dependencies,
        )
        # Logical operators may only depend on other logical operators.
        for x in input_dependencies:
            assert isinstance(x, LogicalOperator), x
        self._num_outputs = num_outputs

    def estimated_num_outputs(self) -> Optional[int]:
        """Returns the estimated number of blocks that
        would be outputted by this logical operator.

        This method does not execute the plan, so it does not take into consideration
        block splitting. This method only considers high-level block constraints like
        `Dataset.repartition(num_blocks=X)`. A more accurate estimation can be given by
        `PhysicalOperator.num_outputs_total()` during execution.
        """
        if self._num_outputs is not None:
            return self._num_outputs
        elif len(self._input_dependencies) == 1:
            # Single-input operators pass through their input's estimate.
            return self._input_dependencies[0].estimated_num_outputs()
        return None

    # Override the following 3 methods to correct type hints.

    @property
    def input_dependencies(self) -> List["LogicalOperator"]:
        return super().input_dependencies  # type: ignore

    @property
    def output_dependencies(self) -> List["LogicalOperator"]:
        return super().output_dependencies  # type: ignore

    def post_order_iter(self) -> Iterator["LogicalOperator"]:
        return super().post_order_iter()  # type: ignore

    def output_data(self) -> Optional[List["RefBundle"]]:
        """The output data of this operator, or ``None`` if not known."""
        return None

    def aggregate_output_metadata(self) -> BlockMetadata:
        """A ``BlockMetadata`` that represents the aggregate metadata of the outputs.

        This method is used by methods like :meth:`~ray.data.Dataset.schema` to
        efficiently return metadata.
        """
        # Default: nothing is known about the outputs.
        return BlockMetadata(None, None, None, None, None)

    def is_lineage_serializable(self) -> bool:
        """Returns whether the lineage of this operator can be serialized.

        An operator is lineage serializable if you can serialize it on one machine and
        deserialize it on another without losing information. Operators that store
        object references (e.g., ``InputData``) aren't lineage serializable because the
        objects aren't available on the deserialized machine.
        """
        return True
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/logical_plan.py
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import TYPE_CHECKING, List
|
| 2 |
+
|
| 3 |
+
from .logical_operator import LogicalOperator
|
| 4 |
+
from .plan import Plan
|
| 5 |
+
|
| 6 |
+
if TYPE_CHECKING:
|
| 7 |
+
from ray.data import DataContext
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class LogicalPlan(Plan):
    """The plan with a DAG of logical operators."""

    def __init__(self, dag: LogicalOperator, context: "DataContext"):
        """Create a logical plan rooted at ``dag``.

        Args:
            dag: The terminal (most-downstream) logical operator.
            context: The DataContext this plan was built under.
        """
        super().__init__(context)
        self._dag = dag

    @property
    def dag(self) -> LogicalOperator:
        """Get the DAG of logical operators."""
        return self._dag

    def sources(self) -> List[LogicalOperator]:
        """List of operators that are sources for this plan's DAG."""
        # If an operator has no input dependencies, it's a source.
        if not any(self._dag.input_dependencies):
            return [self._dag]

        # Otherwise, recursively collect the sources of each upstream subtree.
        sources = []
        for op in self._dag.input_dependencies:
            sources.extend(LogicalPlan(op, self._context).sources())
        return sources
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/operator.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Iterator, List
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
class Operator:
    """Abstract class for operators.

    Operators live on the driver side of the Dataset only.
    """

    def __init__(
        self,
        name: str,
        input_dependencies: List["Operator"],
    ):
        """Create an operator and wire it into its inputs' output lists.

        Args:
            name: Human-readable operator name.
            input_dependencies: Operators that feed this one.
        """
        self._name = name
        self._input_dependencies = input_dependencies
        self._output_dependencies = []
        # Register this operator as a consumer of each of its inputs.
        for dep in input_dependencies:
            assert isinstance(dep, Operator), dep
            dep._output_dependencies.append(self)

    @property
    def name(self) -> str:
        return self._name

    @property
    def input_dependencies(self) -> List["Operator"]:
        """List of operators that provide inputs for this operator."""
        assert hasattr(
            self, "_input_dependencies"
        ), "Operator.__init__() was not called."
        return self._input_dependencies

    @property
    def output_dependencies(self) -> List["Operator"]:
        """List of operators that consume outputs from this operator."""
        assert hasattr(
            self, "_output_dependencies"
        ), "Operator.__init__() was not called."
        return self._output_dependencies

    def post_order_iter(self) -> Iterator["Operator"]:
        """Depth-first traversal of this operator and its input dependencies."""
        for upstream in self.input_dependencies:
            yield from upstream.post_order_iter()
        yield self

    def __repr__(self) -> str:
        if not self.input_dependencies:
            prefix = ""
        else:
            prefix = ", ".join(str(dep) for dep in self.input_dependencies) + " -> "
        return f"{prefix}{self.__class__.__name__}[{self._name}]"

    def __str__(self) -> str:
        return repr(self)
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/optimizer.py
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List
|
| 2 |
+
|
| 3 |
+
from .plan import Plan
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class Rule:
    """Abstract class for optimization rule."""

    def apply(self, plan: Plan) -> Plan:
        """Apply the optimization rule to the execution plan.

        Subclasses override this and return the (possibly rewritten) plan.
        """
        raise NotImplementedError
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class Optimizer:
    """Abstract class for optimizers.

    An optimizer transforms a DAG of operators with a list of predefined rules.
    """

    @property
    def rules(self) -> List[Rule]:
        """List of predefined rules for this optimizer."""
        raise NotImplementedError

    def optimize(self, plan: Plan) -> Plan:
        """Optimize operators with a list of rules.

        Rules are applied in order; each receives the previous rule's output.
        """
        optimized = plan
        for rule in self.rules:
            optimized = rule.apply(optimized)
        return optimized
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/interfaces/physical_plan.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import TYPE_CHECKING, Dict
|
| 2 |
+
|
| 3 |
+
from .logical_operator import LogicalOperator
|
| 4 |
+
from .plan import Plan
|
| 5 |
+
|
| 6 |
+
if TYPE_CHECKING:
|
| 7 |
+
from ray.data import DataContext
|
| 8 |
+
from ray.data._internal.execution.interfaces import PhysicalOperator
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class PhysicalPlan(Plan):
    """A plan whose DAG consists of physical operators."""

    def __init__(
        self,
        dag: "PhysicalOperator",
        op_map: Dict["PhysicalOperator", LogicalOperator],
        context: "DataContext",
    ):
        """Build a physical plan.

        Args:
            dag: Root of the physical-operator DAG.
            op_map: Mapping from each physical operator to the logical
                operator it was derived from.
            context: The data context this plan executes under.
        """
        super().__init__(context)
        self._dag = dag
        self._op_map = op_map

    @property
    def dag(self) -> "PhysicalOperator":
        """The root physical-operator DAG of this plan."""
        return self._dag

    @property
    def op_map(self) -> Dict["PhysicalOperator", LogicalOperator]:
        """Mapping from physical operators to their source logical operators."""
        return self._op_map
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/operators/__init__.py
ADDED
|
File without changes
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/operators/__pycache__/count_operator.cpython-310.pyc
ADDED
|
Binary file (1.06 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/operators/__pycache__/input_data_operator.cpython-310.pyc
ADDED
|
Binary file (3.54 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/operators/__pycache__/n_ary_operator.cpython-310.pyc
ADDED
|
Binary file (2.23 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/operators/__pycache__/one_to_one_operator.cpython-310.pyc
ADDED
|
Binary file (3.17 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/logical/operators/__pycache__/read_operator.cpython-310.pyc
ADDED
|
Binary file (3.59 kB). View file
|
|
|