# Converts origin_schema.json / origin_dataset.json into normalized
# ./snow/schema.json and ./snow/dataset.json files.
import json
from pathlib import Path
def judge_db_type(instance_id: str):
    """Infer the database backend from an instance-id prefix.

    Returns "big_query" for "bq"/"ga" prefixes, "snowflake" for "sf",
    "sqlite" for "local", and None for anything else.
    """
    prefix_table = (
        (("bq", "ga"), "big_query"),
        (("sf",), "snowflake"),
        (("local",), "sqlite"),
    )
    for prefixes, db_type in prefix_table:
        if instance_id.startswith(prefixes):
            return db_type
    return None
def process_schema(table: dict):
    """Normalize one raw schema record into the output schema format.

    Args:
        table: raw schema dict; must contain "db_id", "db_stats" (with a
            "No. of columns" entry), "table_names_original",
            "column_names_original", "column_types",
            "column_descriptions", and "table_to_projDataset".

    Returns:
        dict with the normalized keys; "db_type" is set to None here and
        is expected to be back-filled later from the dataset records.
    """
    format_table = dict()
    format_table["db_id"] = table["db_id"]
    format_table["db_size"] = table["db_stats"]["No. of columns"]
    # format_table["db_type"] = judge_db_type(table["db_type"])  # may need to be inferred from the data
    # BUG FIX: the original line ended with a trailing comma
    # ("= None,"), which stored the tuple (None,) instead of None.
    format_table["db_type"] = None
    format_table["table_names_original"] = table["table_names_original"]
    format_table["column_names_original"] = table["column_names_original"]
    format_table["column_types"] = table["column_types"]
    format_table["column_descriptions"] = table["column_descriptions"]
    # format_table["sample_rows"] = table["sample_rows"]
    format_table["table_to_projDataset"] = table["table_to_projDataset"]
    return format_table
def process_dataset(dataset: dict):
    """Normalize one raw dataset record into the output format.

    Infers "db_type" from the instance id prefix via judge_db_type and
    renames "No. of candidate columns" / "external_knowledge" to
    "db_size" / "external_path".
    """
    return {
        "instance_id": dataset["instance_id"],
        "db_id": dataset["db_id"],
        "question": dataset["question"],
        "db_type": judge_db_type(dataset["instance_id"]),
        "db_size": dataset["No. of candidate columns"],
        "query": dataset["query"],
        "external_path": dataset["external_knowledge"],
    }
if __name__ == '__main__':
    # Load the raw inputs.
    with open("origin_schema.json", 'r', encoding='utf-8') as f:
        schema = json.load(f)
    with open("origin_dataset.json", 'r', encoding='utf-8') as f:
        data = json.load(f)
    # Normalize schema records (db_type filled in below).
    table_data = []
    for row in schema:
        table_data.append(process_schema(row))
    # Normalize dataset records and record each db_id's db_type.
    dataset_data = []
    db_type_map = dict()
    for row in data:
        res = process_dataset(row)
        dataset_data.append(res)
        # BUG FIX: the original guard tested `res["db_type"] not in
        # db_type_map` — a db_type VALUE against db_id KEYS — so the
        # first-wins guard never did what it said; key by db_id.
        if res["db_id"] not in db_type_map:
            db_type_map[res["db_id"]] = res["db_type"]
    # Back-fill db_type on each schema row; .get leaves None for db_ids
    # that have no dataset row instead of raising KeyError.
    for row in table_data:
        row["db_type"] = db_type_map.get(row["db_id"])
    # Make sure the output directory exists before writing.
    Path("./snow").mkdir(parents=True, exist_ok=True)
    with open("./snow/dataset.json", 'w', encoding='utf-8') as f:
        json.dump(dataset_data, f, ensure_ascii=False, indent=4)
    with open("./snow/schema.json", 'w', encoding='utf-8') as f:
        json.dump(table_data, f, ensure_ascii=False, indent=4)