import copy
import json
import os
import shutil

from Spider import dump_db_json_schema
from utils import write_jsonl_file, parse
def preprocess(args, split):
    """Convert one CoSQL ``sql_state_tracking`` split into jsonl dialogs.

    For every sample this emits a multi-turn dialog built from the
    interaction turns; for the ``train`` split it additionally emits a
    single-turn dialog from the sample's canonical "final" question.
    The sqlite databases are copied next to the output once, and each
    dialog carries the database schema as knowledge.

    Args:
        args: parsed CLI arguments providing ``input_dir`` and ``output_dir``.
        split: dataset split name, e.g. ``"train"`` or ``"dev"``.
    """

    def _clean(text):
        # Normalize LaTeX-style double quotes to plain '"' and trim whitespace.
        return text.replace("``", '"').replace("''", '"').strip()

    data_filepaths = [
        # os.path.join is variadic — no need to nest two calls.
        os.path.join(args.input_dir, "sql_state_tracking", f"cosql_{split}.json")
    ]
    db_path = os.path.join(args.input_dir, "database")
    out_db_path = os.path.join(args.output_dir, "database")
    # Copy the sqlite databases alongside the output only once
    # (copytree fails if the destination already exists).
    if not os.path.exists(out_db_path):
        shutil.copytree(db_path, out_db_path)
    schema_cache = {}  # db_id -> dumped schema; dumping is per-database work
    processed_data = []
    for data_filepath in data_filepaths:
        with open(data_filepath, encoding="utf-8") as f:
            cosql = json.load(f)
        for sample in cosql:
            dialog = {
                "locale": "en",
                "dialog": [],
            }
            db_id = sample["database_id"]
            if db_id not in schema_cache:
                schema_cache[db_id] = dump_db_json_schema(
                    # Build the path portably instead of concatenating "/".
                    os.path.join(db_path, db_id, f"{db_id}.sqlite"), db_id
                )
            schema = schema_cache[db_id]
            dialog["knowledge"] = {
                "type": "dict",
                "value": {"db_id": db_id, "schema": schema, "db_path": out_db_path},
            }
            # Single-turn variant: the canonical final question/query pair.
            final_dialog = copy.deepcopy(dialog)
            final_dialog["turn"] = "single"
            final_dialog["dialog"].append(
                {
                    "roles": ["USER"],
                    "utterance": _clean(sample["final"]["utterance"]),
                    "sql": sample["final"]["query"],
                }
            )
            # Only the train split keeps the single-turn variant.
            if split == "train":
                processed_data.append(final_dialog)
            dialog["turn"] = "multi"
            roles = ["USER", "SYSTEM"]
            for turn in sample["interaction"]:
                # A turn's utterance may interleave alternating USER/SYSTEM
                # messages separated by "|"; index parity picks the role.
                for idx, utterance in enumerate(_clean(turn["utterance"]).split("|")):
                    dialog["dialog"].append(
                        {"roles": [roles[idx % 2]], "utterance": utterance.strip()}
                    )
                # The turn's SQL belongs to its last message, which is
                # expected to be a USER message.
                assert dialog["dialog"][-1]["roles"] == ["USER"]
                dialog["dialog"][-1]["sql"] = turn["query"]
            processed_data.append(dialog)
    write_jsonl_file(processed_data, os.path.join(args.output_dir, f"{split}.jsonl"))
if __name__ == "__main__":
    # CLI entry point: preprocess both splits, then carry tables.json over
    # to the output directory unchanged.
    args = parse()
    for split in ("train", "dev"):
        preprocess(args, split)
    shutil.copyfile(
        os.path.join(args.input_dir, "tables.json"),
        os.path.join(args.output_dir, "tables.json"),
    )