File size: 1,611 Bytes
a03ed61
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
import os
import json
from concurrent.futures import ThreadPoolExecutor, as_completed
from tqdm import tqdm  # progress-bar library
import datasets

# NOTE: assumes the PixmoDataset class is already defined elsewhere.
# Download/cache the allenai/pixmo-points train split under ./Datasets/.
root_path = './Datasets/'
data = datasets.load_dataset("allenai/pixmo-points", split="train", cache_dir=root_path)
len_data = len(data)  # total rows in the split, used for progress reporting
image_folder = os.path.join(root_path,"pixmo_images")  # NOTE(review): unused in this script — verify before removing

# JSON file listing dataset indices of valid single-point samples
# (presumably produced by an earlier filtering step — TODO confirm).
valid_one_points_indices = '/home/panwen.hu/workspace/jian.zhang/EAI/EAI2025/pixmo-points/Datasets/valid_one_points_indices.json'

def load_json(file_path):
    """Read a UTF-8 encoded JSON file and return the parsed object.

    Args:
        file_path: Path to the JSON file on disk.

    Returns:
        The deserialized Python object (typically a dict or list).
    """
    with open(file_path, 'r', encoding='utf-8') as fp:
        return json.load(fp)


data_json = load_json(valid_one_points_indices)
# Dataset row indices to process; falls back to an empty list if the
# "index" key is absent from the JSON file.
index_list = data_json.get("index", [])

# Accumulates unique instruction labels; a set deduplicates across rows.
ins_all = set()

def process_item(i):
    """Return the point label ('label' field) of dataset row *i*.

    Also prints a simple progress line: index, total rows, and percentage.
    """
    print(i,len_data,f"{(i / len_data) * 100:.3f}%")
    return data[i]['label']

# Process items in parallel with a thread pool; dataset access releases the
# GIL during I/O, so threads overlap the waits.
with ThreadPoolExecutor(max_workers=64) as executor:  # tune max_workers to your CPU / I/O profile
    # Submit one task per valid single-point index.
    futures = [
        executor.submit(process_item, i)  for i in index_list
    ]

    # Show progress as futures complete.
    # BUG FIX: the total must be the number of submitted tasks (len(futures)),
    # not the full dataset length — only index_list entries are processed, so
    # with total=len_data the bar could never reach 100%.
    for future in tqdm(as_completed(futures), total=len(futures), desc="Processing"):
        instruction = future.result()
        ins_all.add(instruction)

# Persist the unique instructions to a JSON file.
out_dir = '/home/panwen.hu/workspace/jian.zhang/EAI/EAI2025/Afford-RDT/data/encode_language/'
os.makedirs(out_dir, exist_ok=True)  # robustness: tolerate a missing output directory
json_path = os.path.join(out_dir, "pixmo_all_instructions_one_point.json")
with open(json_path, "w", encoding="utf-8") as f:
    # Sort for deterministic output across runs (set iteration order is
    # arbitrary); ensure_ascii=False keeps non-ASCII labels human-readable.
    json.dump(sorted(ins_all), f, indent=4, ensure_ascii=False)