| from utils import parse, write_jsonl_file, read_json_file | |
| import os | |
def build_wiki_index(wiki_dir):
    """Index the wiki documents in *wiki_dir* by their "wikiDocumentIdx".

    Each file in the directory is parsed as JSON and is expected to carry a
    "wikiDocumentIdx" field identifying the document.

    Args:
        wiki_dir: directory containing one JSON wiki document per file.

    Returns:
        dict mapping wikiDocumentIdx -> absolute-ish path of its JSON file.
    """
    index = {}
    for name in os.listdir(wiki_dir):
        path = os.path.join(wiki_dir, name)
        document = read_json_file(path)
        index[document["wikiDocumentIdx"]] = path
    return index
def merge_consecutive_turns(history):
    """Collapse consecutive turns by the same speaker into single turns.

    Texts of merged turns are joined with one space (later texts stripped),
    and their "docIdx" values are collected into a list, de-duplicated while
    preserving first-seen order. Every returned turn has "docIdx" as a list,
    even when nothing was merged.

    Fix over the original: the input dicts are no longer mutated in place
    (the original rewrote each turn's "docIdx"/"text" inside the caller's
    `history`); merged turns are shallow copies instead.

    Args:
        history: list of turn dicts with at least "uid", "text", "docIdx".

    Returns:
        A new list of merged turn dicts; the input list and its dicts are
        left untouched.
    """
    merged = []
    current = None
    for turn in history:
        if current is not None and current["uid"] == turn["uid"]:
            # Same speaker as the running turn: append text, collect docIdx.
            current["text"] += " " + turn["text"].strip()
            if turn["docIdx"] not in current["docIdx"]:
                current["docIdx"].append(turn["docIdx"])
        else:
            if current is not None:
                merged.append(current)
            # Shallow-copy so the caller's dict is not modified.
            current = dict(turn)
            current["docIdx"] = [current["docIdx"]]
    if current is not None:
        merged.append(current)
    return merged
def load_train_files(args):
    """List the conversation filenames that belong to the training split.

    Args:
        args: parsed CLI namespace; only `args.input_dir` is read.

    Returns:
        list of filenames (not full paths) under
        `<input_dir>/Conversations/train`, in `os.listdir` order.
    """
    conv_dir = os.path.join(args.input_dir, "Conversations")
    input_dir = os.path.join(conv_dir, "train")
    # os.listdir already returns a list; the original wrapped it in a
    # pointless identity comprehension.
    return os.listdir(input_dir)
def preprocess(args, split, train_files=None):
    """Convert one conversation split into the unified jsonl dialog format.

    Reads every conversation JSON in `<input_dir>/Conversations/<split>`,
    merges consecutive same-speaker turns, attaches the referenced wiki
    document as knowledge, and writes one dialog per line to
    `<output_dir>/<split>.jsonl` (the "valid" split is written as "dev").

    Args:
        args: parsed CLI namespace; `args.input_dir` and `args.output_dir`
            are read.
        split: split directory name ("train", "valid" or "test").
        train_files: optional collection of filenames to skip (used so the
            valid/test splits exclude conversations already in train).
    """
    wiki_index = build_wiki_index(os.path.join(args.input_dir, "WikiData"))
    input_dir = os.path.join(args.input_dir, "Conversations", split)
    # On disk the split is called "valid", but the output file uses "dev".
    if split == "valid":
        split = "dev"
    output_file = os.path.join(args.output_dir, f"{split}.jsonl")

    def flip_role(role):
        # Swap the two speaker labels; anything else is malformed input.
        if role == "user1":
            return "user2"
        if role == "user2":
            return "user1"
        raise ValueError(f"Unknown role: {role}")

    processed_data = []
    for filename in os.listdir(input_dir):
        # Skip conversations that already went into the training split.
        if train_files is not None and filename in train_files:
            continue
        data = read_json_file(os.path.join(input_dir, filename))
        turns = merge_consecutive_turns(data["history"])
        wikidata = read_json_file(wiki_index[data["wikiDocumentIdx"]])
        # Normalize speakers so the opening turn is always "user1".
        flip = bool(turns) and turns[0]["uid"] == "user2"
        dialog = {"turn": "multi", "locale": "en", "dialog": []}
        for turn in turns:
            role = flip_role(turn["uid"]) if flip else turn["uid"]
            dialog["dialog"].append(
                {
                    "roles": [role],
                    "utterance": turn["text"],
                    "knowledge_to_select": list(map(str, turn["docIdx"])),
                }
            )
        dialog["knowledge"] = {"type": "dict", "value": wikidata}
        processed_data.append(dialog)
    write_jsonl_file(processed_data, output_file)
| if __name__ == "__main__": | |
| args = parse() | |
| train_files = load_train_files(args) | |
| preprocess(args, "train") | |
| preprocess(args, "valid", train_files) | |
| preprocess(args, "test", train_files) | |