"""Inject a per-dataset `instruction` field into every record of the
mteb-Clustering `.jsonl` files, in place.

For each `<root>/<prefix>_<name>_<lang>.jsonl` file the matching instruction is
looked up by `name` in `<src>_instructions.json`, added to every JSON line, and
the result is written to a `.jsonl.inst` sibling.  Once all files are rewritten,
the originals are removed and the `.inst` files are renamed back, so the
directory ends up with the same filenames but augmented records.
"""
import os
import pandas as pd  # NOTE(review): unused here; kept in case other tooling relies on it
import json
from tqdm import tqdm
from glob import glob
from collections import defaultdict  # NOTE(review): unused here; kept deliberately

JSONL_SUFFIX = '.jsonl'
INST_SUFFIX = '.inst'

src = 'mteb-Clustering'

# Instruction lookup: dataset name -> instruction string.
with open(f'/etc/ssd1/jiangzhongtao/baai_embedding_tune/data/all_collect/instructions/{src}_instructions.json') as f:
    instructions = json.load(f)

root_path = f'/etc/ssd1/jiangzhongtao/baai_embedding_tune/data/all_collect/{src}'
fnames = os.listdir(root_path)

# Outer bar tracks files, inner (unsized) bar tracks total lines processed.
pbar = tqdm()
for fname in tqdm(fnames):
    # Filenames look like '<prefix>_<name>_<lang>.jsonl'; strip the suffix
    # explicitly instead of a magic `[:-6]` slice.
    stem = fname[:-len(JSONL_SUFFIX)] if fname.endswith(JSONL_SUFFIX) else fname
    _, name, lang = stem.split('_')
    instruction = instructions[name]

    in_path = os.path.join(root_path, fname)
    out_path = in_path + INST_SUFFIX
    # Context managers guarantee both handles are closed even if a line
    # fails to parse (the originals were leaked on exception).
    with open(in_path, 'r') as f_in, open(out_path, 'w') as f_out:
        for raw_line in f_in:
            record = json.loads(raw_line)
            record['instruction'] = instruction
            f_out.write(json.dumps(record, ensure_ascii=False) + '\n')
            pbar.update(1)

# Swap the augmented files into place: remove originals, then drop the
# '.inst' suffix.  NOTE(review): this is destructive — if the process dies
# between the two loops, only the '.inst' copies survive.
for fname in tqdm(glob(f'/etc/ssd1/jiangzhongtao/baai_embedding_tune/data/all_collect/{src}/*{JSONL_SUFFIX}')):
    os.remove(fname)
for fname in tqdm(glob(f'/etc/ssd1/jiangzhongtao/baai_embedding_tune/data/all_collect/{src}/*{JSONL_SUFFIX}{INST_SUFFIX}')):
    # Strip the suffix only at the END of the path; the original
    # `fname.replace('.inst', '')` would also mangle any '.inst'
    # occurring elsewhere in the path.
    os.rename(fname, fname[:-len(INST_SUFFIX)])