# Author: Kenneth Enevoldsen
# Added creation scripts
# (commit 32e9f98)
import json
import pickle
from pathlib import Path
from spacy.tokens import Span
import dacy
from dacy.datasets import dane
def load_examples(save_path=Path("examples.pkl")):
    """Load DaNE train/dev/test as spaCy ``Example`` objects with DaCy predictions.

    The full list of examples (test + train + dev, in that order) is cached to
    *save_path* with pickle; subsequent calls return the cached list instead of
    re-running the model.

    Args:
        save_path: Location of the pickle cache. Defaults to ``examples.pkl``
            in the working directory.

    Returns:
        A list of spaCy ``Example`` objects whose ``predicted`` doc holds the
        output of the ``da_dacy_large_ner_fine_grained-0.1.0`` pipeline.
    """
    save_path = Path(save_path)  # accept both str and Path
    if save_path.exists():
        # NOTE: pickle is only safe because this cache was written by us below;
        # never load a pickle file from an untrusted source.
        with open(save_path, "rb") as f:
            return pickle.load(f)
    train, dev, test = dane()
    nlp = dacy.load("da_dacy_large_ner_fine_grained-0.1.0")
    examples = list(test(nlp)) + list(train(nlp)) + list(dev(nlp))
    # Re-run the pipeline on the raw texts so each example carries a fresh
    # predicted doc (pipe() streams docs in the same order as the texts).
    docs = nlp.pipe([ex.x.text for ex in examples])
    for e in examples:
        e.predicted = next(docs)
    # Was a repeated "examples.pkl" literal; reuse save_path so the cache that
    # is written is always the cache that is later read.
    with open(save_path, "wb") as f:
        pickle.dump(examples, f)
    return examples
def normalize_examples(examples):
    """Rewrite each example's gold entities to use long-form label names.

    DaNE's coarse labels (PER/LOC/ORG/MISC) are mapped to PERSON/LOCATION/
    ORGANIZATION/MISC on the reference doc (``example.y``) in place.

    Args:
        examples: Iterable of spaCy ``Example`` objects.

    Returns:
        The same ``examples`` object, with reference entities relabelled.
    """
    long_names = {
        "PER": "PERSON",
        "LOC": "LOCATION",
        "ORG": "ORGANIZATION",
        "MISC": "MISC",
    }
    for example in examples:
        doc = example.y
        # Spans are immutable, so each entity is rebuilt with the new label
        # over the same token boundaries and assigned back onto the doc.
        doc.ents = [
            Span(doc, start=span.start, end=span.end, label=long_names[span.label_])
            for span in doc.ents
        ]
    return examples
def _ents_to_span_dicts(ents):
    """Serialize entity spans to Prodigy-style span dicts.

    Each span dict carries character offsets plus token indices; ``token_end``
    is inclusive, hence the ``- 1`` on spaCy's exclusive ``end``.
    """
    return [
        {
            "start": ent.start_char,
            "end": ent.end_char,
            "label": ent.label_,
            "token_start": ent.start,
            "token_end": ent.end - 1,  # Prodigy uses an inclusive token_end
        }
        for ent in ents
    ]


def example_to_review_format(example) -> dict:
    """Convert a spaCy ``Example`` into a Prodigy ``review``-recipe task dict.

    The task contains two versions of the annotation over the same text and
    tokenization: the gold reference (session "reference") and the model
    prediction (session named after the DaCy model).

    Args:
        example: A spaCy ``Example`` whose ``predicted`` doc has been set.

    Returns:
        A dict with ``text``, ``tokens``, ``answer``, ``view_id`` and a
        two-element ``versions`` list (reference first, prediction second).
    """
    ref = example.y
    text = ref.text
    # Token dicts with character offsets, shared by both versions.
    tokens = [
        {"text": t.text, "start": t.idx, "end": t.idx + len(t), "id": i}
        for i, t in enumerate(ref)
    ]
    answer = "accept"
    versions = []
    # Version 1: the gold-standard annotation.
    v_ref = {
        "text": text,
        "tokens": tokens,
        "spans": _ents_to_span_dicts(ref.ents),
        "answer": answer,
        "sessions": ["reference"],
        "default": True,
    }
    versions.append(v_ref)
    # Version 2: the model's prediction over the same text.
    v_pred = {
        "text": text,
        "tokens": tokens,
        "spans": _ents_to_span_dicts(example.predicted.ents),
        "answer": answer,
        "sessions": ["da_dacy_large_ner_fine_grained-0.1.0"],
        "default": True,
    }
    versions.append(v_pred)
    return {
        "text": text,
        "tokens": tokens,
        "answer": answer,
        "view_id": "ner_manual",
        "versions": versions,
    }
if __name__ == "__main__":
examples = load_examples()
",".join(set([ent.label_ for e in examples for ent in e.x.ents]))
jsonl_data = [example_to_review_format(e) for e in normalize_examples(examples)]
with open("examples.jsonl", "w") as f:
for json_dict in jsonl_data:
line = json.dumps(json_dict)
f.write(f"{line}\n")
with open("reference.jsonl", "w") as f:
for json_dict in jsonl_data:
line = json.dumps(json_dict["versions"][0])
f.write(f"{line}\n")
with open("predictions.jsonl", "w") as f:
for json_dict in jsonl_data:
line = json.dumps(json_dict["versions"][1])
f.write(f"{line}\n")