# ds_jiuzhang/deep_search/data_syn/select_remain_data.py
# Uploaded by SunSec using the upload-large-folder tool (commit 4ac1fc5, verified).
import os
import json
import time
import re
from tqdm import tqdm
import numpy as np
import torch
import string
from typing import Optional, Tuple, List, Dict
import argparse
def load_json(file_path):
    """Parse a UTF-8 encoded JSON file and return the resulting object.

    Also reports the number of top-level items loaded.
    """
    with open(file_path, encoding="utf-8") as fp:
        data = json.load(fp)
    print(f"Loaded {len(data)} items from {file_path}")
    return data
def save_json(data, file_path):
    """Serialize *data* to *file_path* as pretty-printed, UTF-8 JSON.

    Also reports the number of top-level items written.
    """
    with open(file_path, mode="w", encoding="utf-8") as fp:
        json.dump(data, fp, ensure_ascii=False, indent=4)
    print(f"Saved {len(data)} items to {file_path}")
# Keep items from file_2 whose "idx" does not appear in file_1, and report
# how many overlap (counting each overlapping idx once) plus how many idx
# values repeat within file_2's overlapping portion.
file_1 = "/share/project/sunshuang/deep_search/data_syn/data/mixed_data/splits/final_dataset_new/final_selected_dataset.json"
file_2 = "/share/project/sunshuang/deep_search/data_syn/data/mixed_data/splits/merged_tagged_domain_keypoints_keywords_count_hop_remove_2k_data/final_selected_dataset.json"
output_file = "/share/project/sunshuang/deep_search/data_syn/data/mixed_data/splits/merged_tagged_domain_keypoints_keywords_count_hop_remove_2k_data.json"

data_1 = load_json(file_1)
data_2 = load_json(file_2)

# Use a set for O(1) membership tests; the original list made the loop
# below O(len(data_1) * len(data_2)).
id_1 = {item["idx"] for item in data_1}

in_id_1 = 0        # distinct idx values of data_2 that also appear in data_1
not_in_id_1 = 0    # items of data_2 absent from data_1
in_id_1_id = set() # overlapping idx values already seen (set: O(1) lookups)
dup_id = []        # overlapping idx values repeated within data_2
remain_data = []   # items of data_2 absent from data_1 (candidates to save)

for item in data_2:
    idx = item["idx"]
    if idx not in id_1:
        remain_data.append(item)
        not_in_id_1 += 1
    elif idx in in_id_1_id:
        # Repeated within data_2; counted once in in_id_1 already.
        dup_id.append(idx)
    else:
        in_id_1_id.add(idx)
        in_id_1 += 1

print(f"in_id_1: {in_id_1}, not_in_id_1: {not_in_id_1}")
print(f"dup_id: {len(dup_id)}")
# save_json(remain_data, output_file)