Datasets:
Update README.md
Browse files
README.md
CHANGED
|
@@ -4,4 +4,33 @@ language:
- vi
size_categories:
- 100K<n<1M
---
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
language:
- vi
size_categories:
- 100K<n<1M
---
Script to download and use the dataset:
```python
import os

from datasets import load_dataset
from PIL import Image

# Step 1: pull the metadata tables from the Hugging Face Hub.
# The first call downloads the Parquet data and creates a local cache; the
# next two load the per-table Parquet files from the same repo.
dataset = load_dataset("allietran/CAMEO")
products = load_dataset("allietran/CAMEO", data_files="products.parquet")
reviews = load_dataset("allietran/CAMEO", data_files="reviews.parquet")

# Step 2: root folder for the local image files. Participants are expected to
# have cloned the 'images' folder via git-lfs or downloaded the shards here.
IMAGE_ROOT = "./hf_images"


def get_image(example):
    """Helper to load a PIL image using the relative_path stored in metadata.

    Returns the opened PIL Image, or None when the file is absent locally.
    """
    img_path = os.path.join(IMAGE_ROOT, example['relative_path'])
    return Image.open(img_path) if os.path.exists(img_path) else None


# Example: inspect the first row of each metadata table.
print("First product metadata:", products['train'][0])
print("First review metadata:", reviews['train'][0])
```
|